import ee
import wxee
import eemont
import geemap
import sankee
import numpy as np
import pandas as pd
import xarray as xr
import rioxarray
import geopandas as gpd
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
import cartopy.crs as ccrs
import cartopy.feature as cfeature
from cartopy.mpl.gridliner import LONGITUDE_FORMATTER, LATITUDE_FORMATTER
import os
from os import path as op
import glob
import pickle
from tqdm import tqdm
import lightgbm as lgb
import rasterio
from rasterio.features import rasterize
from rasterio.plot import show
import rasterstats
from rasterstats.io import bounds_window
from sklearn.metrics import confusion_matrix
from sklearn.model_selection import train_test_split
ee.Initialize()
wxee.Initialize()
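# On a fresh machine, Earth Engine needs a one-time authentication before
# Initialize succeeds; a minimal guard, assuming the default credential flow:
try:
    ee.Initialize()
except Exception:
    ee.Authenticate()  # opens a browser prompt and stores a token locally
    ee.Initialize()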
# JavaScript-to-Python conversion using geemap
from geemap.conversion import *
=""" // 2014
jsvar s1 = ee.ImageCollection('COPERNICUS/S1_GRD')
.filterBounds(geometry)
.filterDate("2022-1-01","2022-07-28")
//8/7 bad
// Filter speckle noise
var filterSpeckles = function(img) {
var vv = img.select('VV') //select the VV polarization band
//Apply a focal median filter
var vv_smoothed = vv.focal_median(100,'circle','meters').rename('VV_Filtered')
return img.addBands(vv_smoothed) // Add filtered VV band to original image
}
// now generate 12 month image,apply speckle noise and initiate thresholding and export and then add to maplayer using for loop
for (var i = 1; i <= 7; i++) {
var year = '2022';
var s1_month = s1.filter(ee.Filter.calendarRange(i, i, 'month'));
var filtered = s1_month.map(filterSpeckles)
var flooded = filtered.select('VV_Filtered').median().lt(-14).rename('water').selfMask();
// add to maplayer with 0 and 1 color palette white and blue
Map.addLayer(flooded.clip(geometry), {min:0, max:1, palette: ['blue']},'month'+i+'_'+year);
Export.image.toDrive({
image: flooded.clip(geometry),
description: i+'_'+year,
scale: 10,
region: geometry,
maxPixels: 1e13,
fileFormat: 'GeoTIFF',
crs: 'EPSG:32615',
formatOptions: {cloudOptimized: true}
});}"""
geemap.js_snippet_to_py(js)
import ee
import geemap
# 2014
s1 = ee.ImageCollection('COPERNICUS/S1_GRD') \
    .filterBounds(geometry) \
    .filterDate("2022-01-01", "2022-07-28")  # 8/7 bad

# Filter speckle noise
def filterSpeckles(img):
    vv = img.select('VV')  # select the VV polarization band
    # Apply a focal median filter
    vv_smoothed = vv.focal_median(100, 'circle', 'meters').rename('VV_Filtered')
    return img.addBands(vv_smoothed)  # Add filtered VV band to original image

Map = geemap.Map()
# Generate a monthly image, apply speckle filtering, threshold, export, and add to the map in a for loop
for i in range(1, 8):  # months 1 through 7, matching the JS loop
    year = '2022'
    s1_month = s1.filter(ee.Filter.calendarRange(i, i, 'month'))
    filtered = s1_month.map(filterSpeckles)
    flooded = filtered.select('VV_Filtered').median().lt(-14).rename('water').selfMask()
    # add to the map with a blue palette
    Map.addLayer(flooded.clip(geometry), {'min': 0, 'max': 1, 'palette': ['blue']}, 'month' + str(i) + '_' + year)
    task = ee.batch.Export.image.toDrive(
        image=flooded.clip(geometry),
        description=str(i) + '_' + year,
        scale=10,
        region=geometry,
        maxPixels=1e13,
        fileFormat='GeoTIFF',
        crs='EPSG:32615',
        formatOptions={'cloudOptimized': True})
    task.start()
Map
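# Before exporting, it helps to sanity-check how many Sentinel-1 scenes fall in
# each month, since sparse months make the median threshold unreliable
# (a sketch; reuses `s1` and the loop bounds from above):
for i in range(1, 8):
    n = s1.filter(ee.Filter.calendarRange(i, i, 'month')).size().getInfo()
    print('month', i, ':', n, 'Sentinel-1 scenes')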
# roi = ee.FeatureCollection('users/hafezahmad100/ctg').geometry()
# fc = ee.FeatureCollection('users/hafezahmad100/ctg')
# fc = ee.Geometry.Rectangle([116.2621, 39.8412, 116.4849, 40.01236])
roi = ee.FeatureCollection('users/hafezahmad100/Oktibbeha').geometry()
delta = ee.FeatureCollection('users/hafezahmad100/deltasundar').geometry()
lmav = ee.FeatureCollection('users/hafezahmad100/LMAV').geometry()
roi = ee.Geometry.Polygon([-91.59861421152112, 40.6286019759042, -91.96940278573987, 40.3298523646474, -91.16740083261487, 40.31309984387848, -90.91746186777112, 40.655695086723945, -91.59861421152112, 40.6286019759042])
collection = ee.ImageCollection('LANDSAT/LT05/C01/T1_TOA').filterBounds(delta).select(['B3', 'B4', 'B6']).filterDate('2010-01-01', '2010-05-01')
ts = collection.wx.to_time_series()
yearly = ts.aggregate_time(frequency="year", reducer=ee.Reducer.median())
monthly_ts_land = yearly.map(lambda img: img.clip(delta))
ds = monthly_ts_land.wx.to_xarray(region=delta, scale=75)
ds
<xarray.Dataset>
Dimensions:  (time: 1, y: 1455, x: 2174)
Coordinates:
  * time     (time) datetime64[ns] 2010-01-14T04:15:37
  * y        (y) float64 22.5 22.5 22.5 22.5 22.5 ... 21.52 21.52 21.52 21.52
  * x        (x) float64 88.42 88.42 88.42 88.42 ... 89.88 89.88 89.88 89.88
Data variables:
    B3       (time, y, x) float32 nan nan nan nan nan ... nan nan nan nan nan
    B4       (time, y, x) float32 nan nan nan nan nan ... nan nan nan nan nan
    B6       (time, y, x) float32 nan nan nan nan nan ... nan nan nan nan nan
Attributes:
    transform:               (0.0006737364630896412, 0.0, 88.42049967942141, ...
    crs:                     +init=epsg:4326
    res:                     (0.0006737364630896412, 0.0006737364630896412)
    is_tiled:                1
    nodatavals:              (-32768.0,)
    scales:                  (1.0,)
    offsets:                 (0.0,)
    AREA_OR_POINT:           Area
    TIFFTAG_RESOLUTIONUNIT:  1 (unitless)
    TIFFTAG_XRESOLUTION:     1
    TIFFTAG_YRESOLUTION:     1
ds.SurfaceTemp.plot()
<matplotlib.collections.QuadMesh at 0x2a5bb36ef10>
# calculate the NDVI
ds['NDVI'] = (ds.B4 - ds.B3) / (ds.B4 + ds.B3)
# find max and min values of NDVI
max_ndvi = ds.NDVI.max()
min_ndvi = ds.NDVI.min()
# print the max and min values
print(max_ndvi, min_ndvi)
# calculate brightness temperature from the thermal band: BT = (K2 / (ln(K1 / L) + 1)) - 273.15
ds['BT'] = (607.76 / (np.log(1260.56 / ds.B6) + 1)) - 273.15
# calculate the proportion of vegetation from NDVI
ds['PV'] = ((ds.NDVI - min_ndvi) / (max_ndvi - min_ndvi)) ** 2
# calculate emissivity
ds['Emissivity'] = 0.956 + 0.004 * ds.PV
# calculate the surface temperature
ds['SurfaceTemp'] = ds.BT / (1 + ((0.00115 * ds.BT) / 1.4388) * np.log(ds.Emissivity))
# print the max surface temperature and the min brightness temperature
print(ds.SurfaceTemp.max(), ds.BT.min())
<xarray.DataArray 'NDVI' ()>
array(0.88091075)
<xarray.DataArray 'NDVI' ()>
array(-0.71949685)
<xarray.DataArray 'SurfaceTemp' ()>
array(-23.36428851)
<xarray.DataArray 'BT' ()>
array(-57.96063232)
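# A quick scalar check of the same NDVI -> PV -> emissivity -> LST chain, handy
# for catching unit mistakes (the inputs here are illustrative, not taken from
# the dataset):
bt = 22.0  # brightness temperature in deg C
pv = ((0.35 - min_ndvi) / (max_ndvi - min_ndvi)) ** 2  # PV for an NDVI of 0.35
emissivity = 0.956 + 0.004 * pv
lst = bt / (1 + ((0.00115 * bt) / 1.4388) * np.log(emissivity))
print(float(pv), float(emissivity), float(lst))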
collection = (ee.ImageCollection("MODIS/006/MOD09GA")
    .filterDate("2021-06-01", "2021-06-05")
    .select(["sur_refl_b01", "sur_refl_b04", "sur_refl_b03"])
)
# NDVI
modis = wxee.TimeSeries("MODIS/MOD09GA_006_NDVI").filterDate("2000", "2021").select('NDVI')
# LST at 1000 m
# modis = wxee.TimeSeries("MODIS/061/MOD11A1").filterDate("2000", "2021").select('LST_Day_1km')
# lst.multiply(0.02).subtract(273.15)
# ts = (modis
#     .maskClouds(maskShadows=False)
#     .scaleAndOffset()
#     .spectralIndices(index="NDVI")
#     .select("NDVI")
# )
# frequency options: 'year', 'month', 'week', 'day', 'hour'
monthly_ts = modis.aggregate_time("year", ee.Reducer.median())
monthly_ts
<wxee.time_series.TimeSeries at 0x27f8bd3e400>
# Spatial resolution in CRS units (meters)
scale = 500
files = monthly_ts.wx.to_tif(
    out_dir="E:\\python_projects\\essential_visualization",
    prefix="wx_",
    region=roi,
    scale=30,
    crs="EPSG:4326"
)
files
['E:\\python_projects\\essential_visualization\\wx_MODIS_MOD09GA_006_NDVI_2000_02_24.time.20000224T000000.tif',
'E:\\python_projects\\essential_visualization\\wx_MODIS_MOD09GA_006_NDVI_2001_02_24.time.20010224T000000.tif',
'E:\\python_projects\\essential_visualization\\wx_MODIS_MOD09GA_006_NDVI_2002_02_24.time.20020224T000000.tif',
'E:\\python_projects\\essential_visualization\\wx_MODIS_MOD09GA_006_NDVI_2003_02_24.time.20030224T000000.tif',
'E:\\python_projects\\essential_visualization\\wx_MODIS_MOD09GA_006_NDVI_2004_02_24.time.20040224T000000.tif',
'E:\\python_projects\\essential_visualization\\wx_MODIS_MOD09GA_006_NDVI_2005_02_24.time.20050224T000000.tif',
'E:\\python_projects\\essential_visualization\\wx_MODIS_MOD09GA_006_NDVI_2006_02_24.time.20060224T000000.tif',
'E:\\python_projects\\essential_visualization\\wx_MODIS_MOD09GA_006_NDVI_2007_02_24.time.20070224T000000.tif',
'E:\\python_projects\\essential_visualization\\wx_MODIS_MOD09GA_006_NDVI_2008_02_24.time.20080224T000000.tif',
'E:\\python_projects\\essential_visualization\\wx_MODIS_MOD09GA_006_NDVI_2009_02_24.time.20090224T000000.tif',
'E:\\python_projects\\essential_visualization\\wx_MODIS_MOD09GA_006_NDVI_2010_02_24.time.20100224T000000.tif',
'E:\\python_projects\\essential_visualization\\wx_MODIS_MOD09GA_006_NDVI_2011_02_24.time.20110224T000000.tif',
'E:\\python_projects\\essential_visualization\\wx_MODIS_MOD09GA_006_NDVI_2012_02_24.time.20120224T000000.tif',
'E:\\python_projects\\essential_visualization\\wx_MODIS_MOD09GA_006_NDVI_2013_02_24.time.20130224T000000.tif',
'E:\\python_projects\\essential_visualization\\wx_MODIS_MOD09GA_006_NDVI_2014_02_24.time.20140224T000000.tif',
'E:\\python_projects\\essential_visualization\\wx_MODIS_MOD09GA_006_NDVI_2015_02_24.time.20150224T000000.tif',
'E:\\python_projects\\essential_visualization\\wx_MODIS_MOD09GA_006_NDVI_2016_02_28.time.20160228T000000.tif',
'E:\\python_projects\\essential_visualization\\wx_MODIS_MOD09GA_006_NDVI_2017_02_24.time.20170224T000000.tif',
'E:\\python_projects\\essential_visualization\\wx_MODIS_MOD09GA_006_NDVI_2018_02_24.time.20180224T000000.tif',
'E:\\python_projects\\essential_visualization\\wx_MODIS_MOD09GA_006_NDVI_2019_02_24.time.20190224T000000.tif',
'E:\\python_projects\\essential_visualization\\wx_MODIS_MOD09GA_006_NDVI_2020_02_24.time.20200224T000000.tif']
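# The exported GeoTIFFs can be read straight back for a quick visual check; a
# small sketch using rioxarray on the first file from the list above:
da = rioxarray.open_rasterio(files[0])
da.squeeze().plot.imshow(cmap='YlGn')
plt.show()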
modis = wxee.TimeSeries("MODIS/006/MOD09GA").filterDate("2020", "2021")
ts = (modis
    .maskClouds(maskShadows=False)
    .scaleAndOffset()
    .spectralIndices(index="NDVI")
    .select("NDVI")
)
ts
<wxee.time_series.TimeSeries at 0x27f88649790>
monthly_ts = ts.aggregate_time("month", ee.Reducer.median())
monthly_ts
<wxee.time_series.TimeSeries at 0x27f886f88e0>
monthly_ts_land = monthly_ts.map(lambda img: img.clip(roi))
ds = monthly_ts_land.wx.to_xarray(region=roi, scale=50)
ds
<xarray.Dataset>
Dimensions:  (time: 21, y: 538, x: 318)
Coordinates:
  * time     (time) datetime64[ns] 2000-02-24 2001-02-24 ... 2020-02-24
  * y        (y) float64 22.47 22.47 22.47 22.47 ... 22.23 22.23 22.23 22.23
  * x        (x) float64 91.75 91.75 91.75 91.75 ... 91.89 91.89 91.89 91.89
Data variables:
    NDVI     (time, y, x) float32 nan nan nan nan nan ... nan nan nan nan nan
Attributes:
    transform:               (0.00044915764205976077, 0.0, 91.74808407062115, ...
    crs:                     +init=epsg:4326
    res:                     (0.00044915764205976077, 0.00044915764205976077)
    is_tiled:                1
    nodatavals:              (-32768.0,)
    scales:                  (1.0,)
    offsets:                 (0.0,)
    AREA_OR_POINT:           Area
    TIFFTAG_RESOLUTIONUNIT:  1 (unitless)
    TIFFTAG_XRESOLUTION:     1
    TIFFTAG_YRESOLUTION:     1
# select the 2000, 2005, 2010, 2015 and 2020 time steps
# (method='nearest' because the composites are not stamped exactly on Jan 1)
new = ds.sel(time=['2000-01-01', '2005-01-01', '2010-01-01', '2015-01-01', '2020-01-01'], method='nearest')
new
# facet plot of NDVI
g = new.NDVI.plot(col='time', col_wrap=3, cmap='RdYlGn')  # YlOrRd
# set the panel titles: '2000', '2005', '2010', '2015', '2020'
titles = ["2000", "2005", "2010", "2015", "2020"]
for ax, title in zip(g.axes.flat, titles):
    ax.set_title(title, fontweight='bold', fontsize=12)
    # x-axis label 'Longitude' (the y-axis would be 'Latitude')
    ax.set_xlabel('Longitude', fontsize=12)
# add a colorbar named 'NDVI'
cbar = g.fig.colorbar(g.collections[0], ax=g.axes, shrink=0.9)
cbar.ax.set_ylabel('NDVI', fontweight='bold', fontsize=12)
# save the figure
plt.savefig('E:\\python_projects\\essential_visualization\\NDVI.png')
# write each selected year to GeoTIFF in a loop
time = ['2000-01-01', '2005-01-01', '2010-01-01', '2015-01-01', '2020-01-01']
# extract the mean NDVI over time and convert to a pandas dataframe
ndvi_mean = new.NDVI.mean(dim='time').to_pandas()
for i in time:
    # rioxarray provides the GeoTIFF writer for DataArrays
    ds.sel(time=i, method='nearest').NDVI.rio.to_raster('E:\\python_projects\\essential_visualization\\NDVI_' + i + '.tif')
="time", col_wrap=4, cmap="YlGn", vmin=0, vmax=1, aspect=0.8) ds.NDVI.plot(col
<xarray.plot.facetgrid.FacetGrid at 0x27f8bc90b80>
collection = wxee.TimeSeries("LANDSAT/LE07/C01/T1_SR").filterDate('2000-01-01', '2001-05-20').select(['B6'])
monthly_ts = collection.aggregate_time("month", ee.Reducer.median())
monthly_ts_land = monthly_ts.map(lambda img: img.clip(roi))
ds = monthly_ts_land.wx.to_xarray(region=roi, scale=50)
ds6 = ds['B6'] * 0.1
ds6c = ds6 - 273.15  # Landsat 7
EEException: User memory limit exceeded.
# eemont.listIndices()
# monthly composites for 2001; loop to find the average LST for each time step
collection = wxee.TimeSeries("LANDSAT/LE07/C01/T1_SR").filterDate('2001-01-10', '2002-01-10')
monthly_ts = collection.aggregate_time("month", ee.Reducer.median())
monthly_ts_land = monthly_ts.map(lambda img: img.clip(roi))
ds = monthly_ts_land.wx.to_xarray(region=roi, scale=900)
ds6 = ds['B6'] * 0.1
ds6c = ds6 - 273.15  # Landsat 7
time = ds6c.time.values
lst_mean = []
for i in time:
    l = ds6c.sel(time=i).mean().values
    lst_mean.append(l)
df = pd.DataFrame({'lst': lst_mean, 'date': time})
df.to_csv('2001lst.csv', index=False)
landsat7 = (ee.ImageCollection('LANDSAT/LE07/C01/T1_SR')
    .filterBounds(roi)
    .filterDate('2000-01-10', '2020-10-10')
    .maskClouds()
    .scale()
    .select(['B6']))
# version of eemont
print(eemont.__version__)
0.3.4
ts = landsat7.getTimeSeriesByRegions(reducer=[ee.Reducer.mean()],
                                     bands=['B6'],
                                     collection=fc,
                                     scale=30)
L8 = (ee.ImageCollection("LANDSAT/LE07/C01/T1_8DAY_NDVI")
    .filterBounds(delta)
    .filterDate("2020-01-01", "2020-02-01")
    .preprocess()
    .spectralIndices(["NDVI"])
    .mean())
c:\Users\Hafez\miniconda3\lib\site-packages\ee_extra\QA\clouds.py:310: UserWarning: This platform is not supported for cloud masking.
warnings.warn("This platform is not supported for cloud masking.")
Exception: Sorry, satellite platform not supported for index computation!
# Landsat 7 land surface temperature time series
collection = wxee.TimeSeries("LANDSAT/LE07/C01/T1_SR").filterDate('2000-01-10', '2020-10-10').filterBounds(roi).select(['B3', 'B4'])
monthly_ts = collection.aggregate_time("year", ee.Reducer.median())
monthly_ts_land = monthly_ts.map(lambda img: img.clip(roi))
ds = monthly_ts_land.wx.to_xarray(region=roi, scale=30)
collection = wxee.TimeSeries("LANDSAT/LE07/C01/T1_SR").filterDate('2000-01-10', '2020-12-10').filterBounds(roi).select(['B6'])
monthly_ts = collection.aggregate_time("year", ee.Reducer.median())
monthly_ts_land = monthly_ts.map(lambda img: img.clip(roi))
dslst = monthly_ts_land.wx.to_xarray(region=roi, scale=30)
# LANDSAT/LT05/C01/T2_SR: B6 scale factor 0.1
# LANDSAT/LT05/C02/T1_L2: ST_B6 scale factor 0.00341802
L8 = (ee.ImageCollection('LANDSAT/LT05/C02/T1_L2')
    .filterBounds(delta)
    .filterDate('1990-01-10', '2000-12-30')
    .scaleAndOffset()
    .select(['ST_B6']))
ts = L8.getTimeSeriesByRegion(geometry=delta,
                              reducer=[ee.Reducer.min(), ee.Reducer.mean(), ee.Reducer.max()],
                              scale=30)
tsPandas = geemap.ee_to_pandas(ts)
# check the shape of the dataframe
tsPandas.shape
# replace -9999 with nan
tsPandas.replace(-9999, np.nan, inplace=True)
# melt the dataframe
lst = pd.melt(tsPandas, id_vars=['date', 'reducer'], value_vars=['B6'], var_name='band', value_name='lst')
# convert date to datetime
lst['date'] = pd.to_datetime(lst['date'], infer_datetime_format=True)
# multiply lst by 0.1
lst['lst'] = lst['lst'] * 0.1
# sort by date
lst.sort_values(by='date', inplace=True)
# select reducer = max
lst_max = lst[lst['reducer'] == 'max']
# keep date and lst, filtered to dates up to 2000
lst_max_2000 = lst_max[lst_max['date'] <= '2000-01-01']
# write lst_max_2000 to csv
lst_max_2000[['lst', 'date']].to_csv('E:\\my works\\delta\\data\\csv\\lst_max_landsat5_1990_2000.csv', index=False)
# select reducer = min
lst_min = lst[lst['reducer'] == 'min']
lst_min_2000 = lst_min[lst_min['date'] <= '2000-01-01']
lst_min_2000[['lst', 'date']].to_csv('E:\\my works\\delta\\data\\csv\\lst_min_landsat5_1990_2000.csv', index=False)
# select reducer = mean
lst_mean = lst[lst['reducer'] == 'mean']
lst_mean_2000 = lst_mean[lst_mean['date'] <= '2000-01-01']
lst_mean_2000[['lst', 'date']].to_csv('E:\\my works\\delta\\data\\csv\\lst_mean_landsat5_1990_2000.csv', index=False)
ds
<xarray.Dataset>
Dimensions:  (time: 21, y: 1039, x: 1555)
Coordinates:
  * time     (time) datetime64[ns] 2000-01-14T16:17:56 ... 2020-01-21T16:04:59
  * y        (y) float64 33.57 33.57 33.57 33.57 ... 33.29 33.29 33.29 33.29
  * x        (x) float64 -89.09 -89.09 -89.09 -89.09 ... -88.67 -88.67 -88.67
Data variables:
    B3       (time, y, x) float64 nan nan nan nan nan ... nan nan nan nan nan
    B4       (time, y, x) float64 nan nan nan nan nan ... nan nan nan nan nan
Attributes:
    transform:               (0.00026949458523585647, 0.0, -89.09032847102515...
    crs:                     +init=epsg:4326
    res:                     (0.00026949458523585647, 0.00026949458523585647)
    is_tiled:                1
    nodatavals:              (-32768.0,)
    scales:                  (1.0,)
    offsets:                 (0.0,)
    AREA_OR_POINT:           Area
    TIFFTAG_RESOLUTIONUNIT:  1 (unitless)
    TIFFTAG_XRESOLUTION:     1
    TIFFTAG_YRESOLUTION:     1
# add an NDVI variable to the xarray dataset
ds['ndvi'] = (ds['B4'] - ds['B3']) / (ds['B4'] + ds['B3'])
ds.ndvi.max()
<xarray.DataArray 'ndvi' ()> array(0.89584377)
ds.ndvi.min()
<xarray.DataArray 'ndvi' ()> array(-0.64008364)
dslst = dslst['B6'] * 0.1
ds6c = dslst - 273.15  # Landsat 7
time = ds6c.time.values
# print max and min of lst
print(ds6c.max())
print(ds6c.min())
<xarray.DataArray 'B6' ()>
array(35.95)
<xarray.DataArray 'B6' ()>
array(-2.6)
time = ds.time.values
year = ds6c.time.dt.year.values.tolist()
year[1]
2001
collection = ee.ImageCollection('LANDSAT/LC08/C01/T1_TOA') \
    .filterBounds(delta) \
    .filterDate('2020-01-01', '2021-01-01')
collection.size().getInfo()
image = collection.median().clip(delta)
# facet plot, 7x3 layout
g = ds6c.plot(col="time", col_wrap=7, vmin=-2.9, vmax=36, cmap='jet', figsize=(25, 8))
for i, ax in enumerate(g.axes.flat):
    ax.set_title(ds6c.time.dt.year.values.tolist()[i])
    ax.set_xticks([])
    ax.set_xlabel('')
    ax.set_ylabel('')
    ax.set_yticks([])
# label the colorbar
g.cbar.ax.tick_params(labelsize=25)
g.cbar.set_label(label='Land surface temperature (°C)', size=15, weight='bold')
# save figure
plt.savefig('E:/my works/starkville lulc/figure/landsat7_lst_map.png', dpi=400, bbox_inches='tight')
plt.show()
# facet plot, 7x3 layout, Robinson projection
g = ds6c.plot(col="time", col_wrap=7, vmin=12, vmax=32, cmap='jet', subplot_kws={'projection': ccrs.Robinson()})
for i, ax in enumerate(g.axes.flat):
    ax.set_title(ds6c.time.dt.year.values.tolist()[i])
    ax.coastlines()
    ax.add_feature(cfeature.BORDERS.with_scale('50m'), linewidth=0.5, edgecolor='black')
    ax.set_xticks([])
    ax.set_xlabel('')
    ax.set_ylabel('')
    ax.set_yticks([])
# label the colorbar
g.cbar.ax.tick_params(labelsize=25)
g.cbar.set_label(label='Land surface temperature (°C)', size=15, weight='bold')
ds6c.sel(time='2000-02-05').plot(ax=ax, vmin=12, vmax=32, cmap='jet')
<cartopy.mpl.geocollection.GeoQuadMesh at 0x1b5c36d6790>
# make a subplot grid of 3 rows x 7 columns
fig, axes = plt.subplots(nrows=3, ncols=7, figsize=(20, 10), subplot_kw=dict(projection=ccrs.Robinson()))
# plot using a for loop
for i, ax in enumerate(axes.flat):
    im = ds6c.sel(time=time[i]).plot(ax=ax, vmin=12, vmax=32, cmap='jet', add_colorbar=False)
    ax.set_title(ds6c.time.dt.year.values.tolist()[i], fontdict={'fontsize': 12}, fontweight='bold')
    # remove labels, grid and ticks
    ax.set_ylabel('')
    ax.set_xlabel('')
    ax.grid(False)
    ax.set_xticks([])
    ax.set_yticks([])
fig.subplots_adjust(right=0.8, top=0.9, bottom=0.1, hspace=0.1, wspace=0)
cbar_ax = fig.add_axes([0.82, 0.12, 0.05, 0.78])
# add a shared colorbar
cbar = fig.colorbar(im, cax=cbar_ax, orientation='vertical')
cbar.set_label('Land surface temperature (°C)', size=15, weight='bold')
# save figure
fig.savefig('E:/my works/land use coastal zone/ctg/figure map/landsat7_lst_map.png', dpi=400, bbox_inches='tight')
plt.show()
# change directory to E:\my works\delta\data\NDBI
os.chdir('E:\\my works\\delta\\data\\NDBI')
# find all GeoTIFFs in the directory
files = glob.glob('*.tif')
print(files)
['NDBI1990.tif', 'NDBI1995.tif', 'NDBI2000.tif', 'NDBI2005.tif', 'NDBI2010.tif', 'NDBI2015.tif', 'NDBI2020.tif']
# change directory to the LST folder
os.chdir("E:\\1 Master's\\MSU\\course classes\\2Summer 2022\\remote sensing\\project\\data\\LST")
# find all GeoTIFFs in the directory
files = glob.glob('*.tif')
print(files)
['lst1985.tif', 'lst1997.tif', 'lst2009.tif', 'lst2021.tif']
ds1 = rioxarray.open_rasterio(files[0])
ds1.squeeze().plot.imshow()
<matplotlib.image.AxesImage at 0x17fb6375d30>
# open each file in the directory and plot it in a subplot grid
plt.subplot(2, 3, 1)
ds1 = rioxarray.open_rasterio(files[0])
ds1.plot(cmap='RdYlGn')
# no xlabel, ylabel, grid, or ticks
plt.xlabel('')
plt.ylabel('')
plt.grid(False)
plt.xticks([])
plt.yticks([])
plt.title('NDVI of ' + files[0][4:8], fontdict={'fontsize': 12}, fontweight='bold')
plt.subplot(2, 3, 2)
ds2 = rioxarray.open_rasterio(files[1])
ds2.plot(cmap='RdYlGn')
plt.xlabel('')
plt.ylabel('')
plt.grid(False)
plt.xticks([])
plt.yticks([])
plt.title('NDVI of ' + files[1][4:8], fontdict={'fontsize': 12}, fontweight='bold')
plt.subplot(2, 3, 3)
ds3 = rioxarray.open_rasterio(files[2])
ds3.plot(cmap='RdYlGn')
plt.xlabel('')
plt.ylabel('')
plt.grid(False)
plt.xticks([])
plt.yticks([])
plt.title('NDVI of ' + files[2][4:8], fontdict={'fontsize': 12}, fontweight='bold')
plt.subplot(2, 3, 4)
ds4 = rioxarray.open_rasterio(files[3])
Text(0.5, 1.0, 'NDVI of 1990')
# change directory to the NDVI folder
os.chdir("E:/1 Master's/MSU/course classes/2Summer 2022/remote sensing/project/data/ndvi")
# find all GeoTIFFs in the directory
files = glob.glob('*.tif')
print(files)
# find the min, mean and max NDVI for each file
for i in range(0, 4):
    ds = rioxarray.open_rasterio(files[i])
    print(f"NDVI of {files[i][4:8]}, min: {ds.min().values:,.2f}, and mean: {ds.mean().values:,.2f}, and max: {ds.max().values:,.2f}")
os.chdir("E:/1 Master's/MSU/course classes/2Summer 2022/remote sensing/project/data/LST")
# find all GeoTIFFs in the directory
files = glob.glob('*.tif')
print(files)
# find the min, mean and max LST for each file
for i in range(0, 4):
    ds = rioxarray.open_rasterio(files[i])
    # filter the data: remove values below -3
    ds = ds.where(ds > -3)
    print(f"LST of {files[i][3:7]}, min: {ds.min().values:,.2f}, and mean: {ds.mean().values:,.2f}, and max: {ds.max().values:,.2f}")
['ndvi1985.tif', 'ndvi1997.tif', 'ndvi2009.tif', 'ndvi2021.tif']
NDVI of 1985, min: -0.12, and mean: 0.28, and max: 0.46
NDVI of 1997, min: -0.10, and mean: 0.28, and max: 0.42
NDVI of 2009, min: -0.12, and mean: 0.29, and max: 0.46
NDVI of 2021, min: -0.77, and mean: 0.70, and max: 0.96
['LST1985_C.tif', 'lst1997.tif', 'lst2009.tif', 'lst2021.tif']
LST of 1985, min: -2.27, and mean: 19.09, and max: 30.86
LST of 1997, min: 8.17, and mean: 16.21, and max: 25.58
LST of 2009, min: 6.06, and mean: 16.31, and max: 30.67
LST of 2021, min: 12.53, and mean: 18.86, and max: 32.76
files[1][4:8]
'1997'
# make a subplot grid with 2 rows and 2 columns
fig, axs = plt.subplots(2, 2, figsize=(25, 10))
for i, ax in enumerate(axs.flat):
    ds = rioxarray.open_rasterio(files[i])
    im = ds.squeeze().plot.imshow(ax=ax, vmin=-0.77, vmax=0.9, cmap='Greens', add_colorbar=False)
    ax.set_title(files[i][4:8], fontdict={'fontsize': 20}, fontweight='bold')
    # remove labels, ticks and grid
    ax.set_xlabel('')
    ax.set_ylabel('')
    ax.set_xticks([])
    ax.set_yticks([])
    ax.grid(False)
fig.subplots_adjust(right=0.8, top=0.9, bottom=0.1, hspace=0.1, wspace=0)
cbar_ax = fig.add_axes([0.82, 0.12, 0.05, 0.78])
# add a shared colorbar
cbar = fig.colorbar(im, cax=cbar_ax, orientation='vertical')
cbar.set_label('Normalized Difference Vegetation Index', size=15, weight='bold')
# save as jpeg
fig.savefig("E:\\1 Master's\\MSU\\course classes\\2Summer 2022\\remote sensing\\project\\figure\\ndvi_map.jpg", dpi=400, bbox_inches='tight')
# change directory to the LST folder
os.chdir("E:/1 Master's/MSU/course classes/2Summer 2022/remote sensing/project/data/LST")
# find all GeoTIFFs in the directory
files = glob.glob('*.tif')
# make a subplot grid with 2 rows and 2 columns
fig, axs = plt.subplots(2, 2, figsize=(25, 10))
for i, ax in enumerate(axs.flat):
    ds = rioxarray.open_rasterio(files[i])
    im = ds.squeeze().plot.imshow(ax=ax, vmin=-2, vmax=33, cmap='jet', add_colorbar=False)
    ax.set_title(files[i][3:7], fontdict={'fontsize': 20}, fontweight='bold')
    # remove labels, ticks and grid
    ax.set_xlabel('')
    ax.set_ylabel('')
    ax.set_xticks([])
    ax.set_yticks([])
    ax.grid(False)
fig.subplots_adjust(right=0.8, top=0.9, bottom=0.1, hspace=0.1, wspace=0)
cbar_ax = fig.add_axes([0.82, 0.12, 0.05, 0.78])
# add a shared colorbar
cbar = fig.colorbar(im, cax=cbar_ax, orientation='vertical')
cbar.set_label('Land surface temperature (°C)', size=15, weight='bold')
# save as jpeg
fig.savefig("E:\\1 Master's\\MSU\\course classes\\2Summer 2022\\remote sensing\\project\\figure\\lst_map.jpg", dpi=400, bbox_inches='tight')
# make a subplot grid with 2 rows and 4 columns
fig, axes = plt.subplots(nrows=2, ncols=4, figsize=(20, 10))
# plot using a for loop
for i, ax in enumerate(axes.flat):
    # stop after the 7 available files
    if i == 7:
        break
    # open the file and plot it
    ds = rioxarray.open_rasterio(files[i])
    im = ds.squeeze().plot.imshow(ax=ax, vmin=-0.9, vmax=0, cmap='jet', add_colorbar=False)
    ax.set_title(files[i][4:8], fontdict={'fontsize': 12}, fontweight='bold')
    # remove labels, ticks and grid
    ax.set_xlabel('')
    ax.set_ylabel('')
    ax.set_xticks([])
    ax.set_yticks([])
    ax.grid(False)
fig.subplots_adjust(right=0.8, top=0.9, bottom=0.1, hspace=0.1, wspace=0)
cbar_ax = fig.add_axes([0.82, 0.12, 0.05, 0.78])
cbar = fig.colorbar(im, cax=cbar_ax, orientation='vertical')
cbar.set_label('Normalized Difference Built-Up Index', size=15, weight='bold')
# delete the unused last subplot
fig.delaxes(axes[1, 3])
# make a subplot grid with 2 rows and 4 columns
fig, axes = plt.subplots(nrows=2, ncols=4, figsize=(20, 10))
# plot using a for loop
for i, ax in enumerate(axes.flat):
    # open the file and plot it
    ds = rioxarray.open_rasterio(files[i - 1])
    im = ds.squeeze().plot.imshow(ax=ax, vmin=-0.8, vmax=0.9, cmap='RdYlGn', add_colorbar=False)
    ax.set_title(files[i - 1][4:8], fontdict={'fontsize': 12}, fontweight='bold')
    # remove labels, ticks and grid
    ax.set_xlabel('')
    ax.set_ylabel('')
    ax.set_xticks([])
    ax.set_yticks([])
    ax.grid(False)
fig.subplots_adjust(right=0.8, top=0.9, bottom=0.1, hspace=0.1, wspace=0)
cbar_ax = fig.add_axes([0.82, 0.12, 0.05, 0.78])
cbar = fig.colorbar(im, cax=cbar_ax, orientation='vertical')
cbar.set_label('Normalized difference vegetation index', size=15, weight='bold')
# save figure
# fig.savefig('landsat_ndvi_map.png', dpi=400, bbox_inches='tight')
# make a subplot grid of 3 rows x 7 columns
fig, axes = plt.subplots(nrows=3, ncols=7, figsize=(20, 10), subplot_kw=dict(projection=ccrs.Robinson()))
# plot using a for loop
for i, ax in enumerate(axes.flat):
    im = ds.sel(time=time[i]).ndvi.plot(ax=ax, vmin=-0.6, vmax=0.8, cmap='RdYlGn', add_colorbar=False)
    ax.set_title(ds.time.dt.year.values.tolist()[i], fontdict={'fontsize': 12}, fontweight='bold')
    # remove labels, grid and ticks
    ax.set_ylabel('')
    ax.set_xlabel('')
    ax.grid(False)
    ax.set_xticks([])
    ax.set_yticks([])
fig.subplots_adjust(right=0.8, top=0.9, bottom=0.1, hspace=0.1, wspace=0)
cbar_ax = fig.add_axes([0.82, 0.12, 0.05, 0.78])
cbar = fig.colorbar(im, cax=cbar_ax, orientation='vertical')
cbar.set_label('Normalized difference vegetation index', size=15, weight='bold')
# save figure
fig.savefig('E:/my works/land use coastal zone/ctg/figure map/landsat7_ndvi_map.png', dpi=400, bbox_inches='tight')
plt.show()
g = ds.ndvi.plot(col="time", col_wrap=7, vmin=-0.64, vmax=0.9, cmap='RdYlGn', figsize=(25, 8))
for i, ax in enumerate(g.axes.flat):
    ax.set_title(ds.time.dt.year.values.tolist()[i])
    ax.set_xticks([])
    ax.set_xlabel('')
    ax.set_ylabel('')
    ax.set_yticks([])
# label the colorbar
g.cbar.ax.tick_params(labelsize=25)
g.cbar.set_label('Normalized difference vegetation index', size=15, weight='bold')
# save figure
plt.savefig('E:/my works/starkville lulc/figure/landsat7_ndvi_map.png', dpi=400, bbox_inches='tight')
plt.show()
# make a subplot grid of 3 rows x 7 columns
fig, axes = plt.subplots(nrows=3, ncols=7, figsize=(20, 10), subplot_kw=dict(projection=ccrs.Robinson()))
# plot using a for loop
for i, ax in enumerate(axes.flat):
    im = ds.sel(time=time[i]).ndvi.plot(ax=ax, vmin=-0.6, vmax=0.8, cmap='RdYlGn', add_colorbar=False)
    ax.set_title(ds.time.dt.year.values.tolist()[i], fontdict={'fontsize': 12}, fontweight='bold')
    # remove labels, grid and ticks
    ax.set_ylabel('')
    ax.set_xlabel('')
    ax.grid(False)
    ax.set_xticks([])
    ax.set_yticks([])
fig.subplots_adjust(right=0.8, top=0.9, bottom=0.1, hspace=0.1, wspace=0)
cbar_ax = fig.add_axes([0.82, 0.12, 0.05, 0.78])
cbar = fig.colorbar(im, cax=cbar_ax, orientation='vertical')
cbar.set_label('Normalized difference vegetation index', size=15, weight='bold')
# save figure
fig.savefig('E:/my works/starkville lulc/figure/landsat7_ndvi_map.png', dpi=400, bbox_inches='tight')
plt.show()
ds.ndvi.max()
<xarray.DataArray 'ndvi' ()> array(0.81266968)
import hvplot.xarray
import holoviews as hv
hv.extension("bokeh")
ds.NDVI.hvplot(
    groupby="time", clim=(0, 1), cmap="YlGn",
    frame_height=600, aspect=0.8,
    widget_location="bottom", widget_type="scrubber",
)
os.chdir('E:\\my works\\land use coastal zone\\ctg\\Data\\LST')
files = ['LST2000.tif', 'LST2005.tif', 'LST2010.tif', 'LST2015.tif', 'LST2020.tif']
chunk = 5
rastterlist = [xr.open_rasterio(x, chunks={'x': None, 'y': None}) for x in files]
C:\Users\Hafez\AppData\Local\Temp\ipykernel_11588\951139861.py:3: DeprecationWarning: open_rasterio is Deprecated in favor of rioxarray. For information about transitioning, see: https://corteva.github.io/rioxarray/stable/getting_started/getting_started.html
  rastterlist=[xr.open_rasterio(x,chunks={'x':None,'y':None}) for x in files]
rastters = xr.concat(rastterlist, dim='band')
rastters
<xarray.DataArray (band: 5, y: 889, x: 491)>
dask.array<concatenate, shape=(5, 889, 491), dtype=float32, chunksize=(1, 889, 491), chunktype=numpy.ndarray>
Coordinates:
  * band     (band) int64 1 1 1 1 1
  * y        (y) float64 2.485e+06 2.485e+06 2.485e+06 ... 2.458e+06 2.458e+06
  * x        (x) float64 3.711e+05 3.712e+05 3.712e+05 ... 3.858e+05 3.858e+05
Attributes:
    transform:      (30.0, 0.0, 371115.0, 0.0, -30.0, 2485005.0)
    crs:            +init=epsg:32646
    res:            (30.0, 30.0)
    is_tiled:       0
    nodatavals:     (-3.3999999521443642e+38,)
    scales:         (1.0,)
    offsets:        (0.0,)
    AREA_OR_POINT:  Area
gsw = ee.Image('JRC/GSW1_1/GlobalSurfaceWater')
print(gsw.bandNames().getInfo())
['occurrence', 'change_abs', 'change_norm', 'seasonality', 'recurrence', 'transition', 'max_extent']
occurrence = gsw.select('occurrence')
vis_occurrence = {'min': 0, 'max': 100, 'palette': ['red', 'blue']}
water_mask = occurrence.gt(90).selfMask()
vis_water_mask = {'palette': ['white', 'blue']}
Map = geemap.Map()
Map.add_basemap('HYBRID')
Map
Map.setCenter(-90.98, 32.49, 11)
Map.addLayer(water_mask, vis_water_mask, 'Water Mask (>90%)')
js = '''
var length = myjrc.size();
var list = myjrc.toList(length);
var image_name = ee.Image(list.get(1));
'''
geemap.js_snippet_to_py(js)
import ee
import geemap
jrc = ee.ImageCollection("JRC/GSW1_0/MonthlyHistory")
# Define study period
startDate = ee.Date.fromYMD(1985, 1, 1)
endDate = ee.Date.fromYMD(1986, 12, 31)
# filter the JRC data for the region and period
myjrc = jrc.filterBounds(lmav).filterDate(startDate, endDate)
# get the date index of every image in the collection
dates = myjrc.aggregate_array('system:index').getInfo()
# export each image to Google Drive, using the date in the description
count = int(myjrc.size().getInfo())
print(count)
for i in range(0, count):
    image = ee.Image(myjrc.toList(count).get(i))
    names = dates[i]
    # export to google drive
    task = ee.batch.Export.image.toDrive(image, description=names, folder='google_ras', maxPixels=1e13, fileFormat='GeoTIFF', crs='EPSG:4326', scale=30, region=lmav)
    task.start()
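# Drive exports run asynchronously; a minimal way to watch the last task until
# it finishes (uses the standard ee.batch task API):
import time
while task.active():
    print('task', task.id, 'state:', task.status()['state'])
    time.sleep(30)
print('final state:', task.status()['state'])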
'1985_04'
collection = ee.ImageCollection('LANDSAT/LT05/C01/T1_TOA') \
    .filterBounds(delta) \
    .filterDate('1-01-01', '2000-04-28')
collection.size().getInfo()
image = collection.median().clip(delta).unmask()
geemap.ee_export_image_to_drive(image, description='landsat2000', region=delta, max_pixels=1e13, folder='google_ras', file_format='GeoTIFF', scale=30)
Exporting landsat2000 ...
geemap.ee_export_image_collection_to_drive(image, max_pixels=1e13, folder='google_ras', file_format='GeoTIFF', scale=30)
The ee_object must be an ee.ImageCollection.
Map = geemap.Map()
ndwiViz = {'min': 1, 'max': 37, 'palette': ['00FFFF', '0000FF']}
Map.setCenter(89.43512348012679, 22.095772163889887, 10)  # Sundarbans delta
Map.add_basemap('HYBRID')
Map
L5 = (ee.ImageCollection("LANDSAT/LE07/C01/T2_SR")
    .filterBounds(delta)
    .filterDate("2000-01-01", "2001-01-01")
    .preprocess()
    .select(['B6'])
    .median())
# LANDSAT/LE07/C01/T2_SR
collection = ee.ImageCollection('LANDSAT/LE07/C01/T2_SR') \
    .filterBounds(delta) \
    .filterDate('2020-01-01', '2020-03-01') \
    .select(['B6']) \
    .median()
image = collection.clip(delta)
Map.addLayer(image, {"min": 0, "max": 3000, 'palette': ['006633', 'E5FFCC', '662A00', 'D8D8D8', 'F5F5F5']}, 'lst')
import ee
ee.Initialize()
# ====================================================================================================#
# INPUT
# ====================================================================================================#
# --------------------------------------------------
# User Requirements
# --------------------------------------------------
select_parameters = ['LST']
select_metrics = ['mean']
percentiles = []

# Time
year_start = 1990
year_end = 1990
month_start = 1
month_end = 4

# Space
select_roi = delta
max_cloud_cover = 30
epsg = 'EPSG:4326'
pixel_resolution = 30
roi_filename = 'MRD'

# --------------------------------------------------
# Algorithm Specifications
# --------------------------------------------------
ndvi_v = 0.6
ndvi_s = 0.2

epsilon_v = 0.985
epsilon_s = 0.97
epsilon_w = 0.99

t_threshold = 8

# Jiménez-Muñoz et al. (2009) (TM & ETM+) TIGR1761 and Jiménez-Muñoz et al. (2014) OLI-TIRS GAPRI4838
cs_l8 = [0.04019, 0.02916, 1.01523,
         -0.38333, -1.50294, 0.20324,
         0.00918, 1.36072, -0.27514]
cs_l7 = [0.06518, 0.00683, 1.02717,
         -0.53003, -1.25866, 0.10490,
         -0.01965, 1.36947, -0.24310]
cs_l5 = [0.07518, -0.00492, 1.03189,
         -0.59600, -1.22554, 0.08104,
         -0.02767, 1.43740, -0.25844]
# ====================================================================================================#
# FUNCTIONS
# ====================================================================================================#
lookup_metrics = {
    'mean': ee.Reducer.mean(),
    'min': ee.Reducer.min(),
    'max': ee.Reducer.max(),
    'std': ee.Reducer.stdDev(),
    'median': ee.Reducer.median(),
    'ts': ee.Reducer.sensSlope()
}

# --------------------------------------------------
# RENAME BANDS
# --------------------------------------------------
def fun_bands_l57(img):
    bands = ['B1', 'B2', 'B3', 'B4', 'B5', 'B7']
    thermal_band = ['B6']
    new_bands = ['B', 'G', 'R', 'NIR', 'SWIR1', 'SWIR2']
    new_thermal_bands = ['TIR']
    vnirswir = img.select(bands).multiply(0.0001).rename(new_bands)
    tir = img.select(thermal_band).multiply(0.1).rename(new_thermal_bands)
    return vnirswir.addBands(tir).copyProperties(img, ['system:time_start'])

def fun_bands_l8(img):
    bands = ['B2', 'B3', 'B4', 'B5', 'B6', 'B7']
    thermal_band = ['B10']
    new_bands = ['B', 'G', 'R', 'NIR', 'SWIR1', 'SWIR2']
    new_thermal_bands = ['TIR']
    vnirswir = img.select(bands).multiply(0.0001).rename(new_bands)
    tir = img.select(thermal_band).multiply(0.1).rename(new_thermal_bands)
    return vnirswir.addBands(tir).copyProperties(img, ['system:time_start'])
# --------------------------------------------------
# MASKING
# --------------------------------------------------
# Function to cloud mask Landsat TM, ETM+, OLI_TIRS Surface Reflectance Products
def fun_mask_ls_sr(img):
    cloudShadowBitMask = ee.Number(2).pow(3).int()
    cloudsBitMask = ee.Number(2).pow(5).int()
    snowBitMask = ee.Number(2).pow(4).int()
    qa = img.select('pixel_qa')
    mask = qa.bitwiseAnd(cloudShadowBitMask).eq(0).And(
        qa.bitwiseAnd(cloudsBitMask).eq(0)).And(
        qa.bitwiseAnd(snowBitMask).eq(0))
    return img.updateMask(mask)

# Function to mask LST below a certain temperature threshold
def fun_mask_T(img):
    mask = img.select('LST').gt(t_threshold)
    return img.updateMask(mask)
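# The pixel_qa test above is plain bit arithmetic; the same logic on ints,
# with an illustrative QA value that has the cloud bit (bit 5) set:
qa_value = 1 << 5                # cloud bit set
shadow_bit, snow_bit, cloud_bit = 1 << 3, 1 << 4, 1 << 5
is_clear = (qa_value & shadow_bit) == 0 and (qa_value & snow_bit) == 0 and (qa_value & cloud_bit) == 0
print(is_clear)  # False: this pixel would be masked out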
# --------------------------------------------------
# MATCHING AND CALIBRATION
# --------------------------------------------------
# Radiometric Calibration
def fun_radcal(img):
    radiance = ee.Algorithms.Landsat.calibratedRadiance(img).rename('RADIANCE')
    return img.addBands(radiance)

# L to ee.Image
def fun_l_addband(img):
    l = ee.Image(img.get('L')).select('RADIANCE').rename('L')
    return img.addBands(l)

# Create maxDifference filter to match TOA and SR products
maxDiffFilter = ee.Filter.maxDifference(
    difference=2 * 24 * 60 * 60 * 1000,
    leftField='system:time_start',
    rightField='system:time_start'
)

# Define join: Water vapor
join_wv = ee.Join.saveBest(
    matchKey='WV',
    measureKey='timeDiff'
)

# Define join: Radiance
join_l = ee.Join.saveBest(
    matchKey='L',
    measureKey='timeDiff'
)
# --------------------------------------------------
# PARAMETER CALCULATION
# --------------------------------------------------
# NDVI
def fun_ndvi(img):
    ndvi = img.normalizedDifference(['NIR', 'R']).rename('NDVI')
    return img.addBands(ndvi)

def fun_ndwi(img):
    ndwi = img.normalizedDifference(['NIR', 'SWIR1']).rename('NDWI')
    return img.addBands(ndwi)
# Tasseled Cap Transformation (brightness, greenness, wetness) based on Crist (1985)
def fun_tcg(img):
    tcg = img.expression(
        'B*(-0.1603) + G*(-0.2819) + R*(-0.4934) + NIR*0.7940 + SWIR1*(-0.0002) + SWIR2*(-0.1446)',
        {'B': img.select(['B']),
         'G': img.select(['G']),
         'R': img.select(['R']),
         'NIR': img.select(['NIR']),
         'SWIR1': img.select(['SWIR1']),
         'SWIR2': img.select(['SWIR2'])
         }).rename('TCG')
    return img.addBands(tcg)

def fun_tcb(img):
    tcb = img.expression(
        'B*0.2043 + G*0.4158 + R*0.5524 + NIR*0.5741 + SWIR1*0.3124 + SWIR2*0.2303',
        {'B': img.select(['B']),
         'G': img.select(['G']),
         'R': img.select(['R']),
         'NIR': img.select(['NIR']),
         'SWIR1': img.select(['SWIR1']),
         'SWIR2': img.select(['SWIR2'])
         }).rename('TCB')
    return img.addBands(tcb)

def fun_tcw(img):
    tcw = img.expression(
        'B*0.0315 + G*0.2021 + R*0.3102 + NIR*0.1594 + SWIR1*(-0.6806) + SWIR2*(-0.6109)',
        {'B': img.select(['B']),
         'G': img.select(['G']),
         'R': img.select(['R']),
         'NIR': img.select(['NIR']),
         'SWIR1': img.select(['SWIR1']),
         'SWIR2': img.select(['SWIR2'])
         }).rename('TCW')
    return img.addBands(tcw)
# Fraction Vegetation Cover (FVC)
def fun_fvc(img):
    fvc = img.expression(
        '((NDVI-NDVI_s)/(NDVI_v-NDVI_s))**2',
        {'NDVI': img.select('NDVI'),
         'NDVI_s': ndvi_s,
         'NDVI_v': ndvi_v
         }).rename('FVC')
    return img.addBands(fvc)

# Scale emissivity (epsilon) between NDVI_s and NDVI_v
def fun_epsilon_scale(img):
    epsilon_scale = img.expression(
        'epsilon_s+(epsilon_v-epsilon_s)*FVC',
        {'FVC': img.select('FVC'),
         'epsilon_s': epsilon_s,
         'epsilon_v': epsilon_v
         }).rename('EPSILON_SCALE')
    return img.addBands(epsilon_scale)

# Emissivity (epsilon)
def fun_epsilon(img):
    pseudo = img.select(['NDVI']).set('system:time_start', img.get('system:time_start'))
    epsilon = pseudo.where(img.expression('NDVI > NDVI_v',
                                          {'NDVI': img.select('NDVI'),
                                           'NDVI_v': ndvi_v}), epsilon_v)
    epsilon = epsilon.where(img.expression('NDVI < NDVI_s && NDVI >= 0',
                                           {'NDVI': img.select('NDVI'),
                                            'NDVI_s': ndvi_s}), epsilon_s)
    epsilon = epsilon.where(img.expression('NDVI < 0',
                                           {'NDVI': img.select('NDVI')}), epsilon_w)
    epsilon = epsilon.where(img.expression('NDVI <= NDVI_v && NDVI >= NDVI_s',
                                           {'NDVI': img.select('NDVI'),
                                            'NDVI_v': ndvi_v,
                                            'NDVI_s': ndvi_s}), img.select('EPSILON_SCALE')).rename('EPSILON')
    return img.addBands(epsilon)

# Function to scale the water vapor content product (kg/m2 to g/cm2)
def fun_wv_scale(img):
    wv_scaled = ee.Image(img.get('WV')).multiply(0.1).rename('WV_SCALED')
    wv_scaled = wv_scaled.resample('bilinear')
    return img.addBands(wv_scaled)
# --------------------------------------------------
# LAND SURFACE TEMPERATURE CALCULATION
# --------------------------------------------------
# Atmospheric Functions
def fun_af1(cs):
    def wrap(img):
        af1 = img.expression(
            '('+str(cs[0])+'*(WV**2))+('+str(cs[1])+'*WV)+('+str(cs[2])+')',
            {'WV': img.select('WV_SCALED')
             }).rename('AF1')
        return img.addBands(af1)
    return wrap

def fun_af2(cs):
    def wrap(img):
        af2 = img.expression(
            '('+str(cs[3])+'*(WV**2))+('+str(cs[4])+'*WV)+('+str(cs[5])+')',
            {'WV': img.select('WV_SCALED')
             }).rename('AF2')
        return img.addBands(af2)
    return wrap

def fun_af3(cs):
    def wrap(img):
        af3 = img.expression(
            '('+str(cs[6])+'*(WV**2))+('+str(cs[7])+'*WV)+('+str(cs[8])+')',
            {'WV': img.select('WV_SCALED')
             }).rename('AF3')
        return img.addBands(af3)
    return wrap

# Gamma Functions
def fun_gamma_l8(img):
    gamma = img.expression('(BT**2)/(1324*L)',
                           {'BT': img.select('TIR'),
                            'L': img.select('L')
                            }).rename('GAMMA')
    return img.addBands(gamma)

def fun_gamma_l7(img):
    gamma = img.expression('(BT**2)/(1277*L)',
                           {'BT': img.select('TIR'),
                            'L': img.select('L')
                            }).rename('GAMMA')
    return img.addBands(gamma)

def fun_gamma_l5(img):
    gamma = img.expression('(BT**2)/(1256*L)',
                           {'BT': img.select('TIR'),
                            'L': img.select('L')
                            }).rename('GAMMA')
    return img.addBands(gamma)

# Delta Functions
def fun_delta_l8(img):
    delta = img.expression('BT-((BT**2)/1324)',
                           {'BT': img.select('TIR')
                            }).rename('DELTA')
    return img.addBands(delta)

def fun_delta_l7(img):
    delta = img.expression('BT-((BT**2)/1277)',
                           {'BT': img.select('TIR')
                            }).rename('DELTA')
    return img.addBands(delta)

def fun_delta_l5(img):
    delta = img.expression('BT-((BT**2)/1256)',
                           {'BT': img.select('TIR')
                            }).rename('DELTA')
    return img.addBands(delta)

# Land Surface Temperature
def fun_lst(img):
    lst = img.expression(
        '(GAMMA*(((1/EPSILON)*(AF1*L+AF2))+AF3)+DELTA)-273.15',
        {'GAMMA': img.select('GAMMA'),
         'DELTA': img.select('DELTA'),
         'EPSILON': img.select('EPSILON'),
         'AF1': img.select('AF1'),
         'AF2': img.select('AF2'),
         'AF3': img.select('AF3'),
         'L': img.select('L')
         }).rename('LST')
    return img.addBands(lst)

def fun_mask_lst(img):
    mask = img.select('LST').gt(t_threshold)
    return img.updateMask(mask)
# --------------------------------------------------
# MOSAICKING
# --------------------------------------------------
def fun_date(img):
    return ee.Date(ee.Image(img).date().format("YYYY-MM-dd"))

def fun_getdates(imgCol):
    return ee.List(imgCol.toList(imgCol.size()).map(fun_date))

def fun_mosaic(date, newList):
    # cast list & date
    newList = ee.List(newList)
    date = ee.Date(date)

    # filter the image collection to the given day
    filtered = ee.ImageCollection(subCol.filterDate(date, date.advance(1, 'day')))

    # check for a duplicate date against the previous image
    img_previous = ee.Image(newList.get(-1))
    img_previous_datestring = img_previous.date().format("YYYY-MM-dd")
    img_previous_millis = ee.Number(ee.Date(img_previous_datestring).millis())

    img_new_datestring = filtered.select(parameter).first().date().format("YYYY-MM-dd")
    img_new_date = ee.Date(img_new_datestring).millis()
    img_new_millis = ee.Number(ee.Date(img_new_datestring).millis())

    date_diff = img_previous_millis.subtract(img_new_millis)

    # mosaic
    img_mosaic = ee.Algorithms.If(
        date_diff.neq(0),
        filtered.select(parameter).mosaic().set('system:time_start', img_new_date),
        ee.ImageCollection(subCol.filterDate(pseudodate, pseudodate.advance(1, 'day')))
    )
    tester = ee.Algorithms.If(date_diff.neq(0), ee.Number(1), ee.Number(0))

    return ee.Algorithms.If(tester, newList.add(img_mosaic), newList)

def fun_timeband(img):
    time = ee.Image(img.metadata('system:time_start', 'TIME').divide(86400000))
    timeband = time.updateMask(img.select(parameter).mask())
    return img.addBands(timeband)
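# fun_mosaic is folded over the date list with iterate(), which runs a function
# server-side over a list while threading an accumulator; a toy example of the
# same pattern (a running sum over a small list):
def accumulate(item, acc):
    acc = ee.List(acc)
    return acc.add(ee.Number(acc.get(-1)).add(item))

running = ee.List.sequence(1, 5).iterate(accumulate, ee.List([0]))
print(ee.List(running).getInfo())  # [0, 1, 3, 6, 10, 15]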
# ====================================================================================================#
# EXECUTE
# ====================================================================================================#
# --------------------------------------------------
# TRANSFORM CLIENT TO SERVER SIDE
# --------------------------------------------------
ndvi_v = ee.Number(ndvi_v)
ndvi_s = ee.Number(ndvi_s)

epsilon_v = ee.Number(epsilon_v)
epsilon_s = ee.Number(epsilon_s)
epsilon_w = ee.Number(epsilon_w)

t_threshold = ee.Number(t_threshold)
# --------------------------------------------------
# IMPORT IMAGE COLLECTIONS
# --------------------------------------------------
# Landsat 5 TM
imgCol_L5_TOA = ee.ImageCollection('LANDSAT/LT05/C01/T1')\
    .filterBounds(select_roi)\
    .filter(ee.Filter.calendarRange(year_start, year_end, 'year'))\
    .filter(ee.Filter.calendarRange(month_start, month_end, 'month'))\
    .filter(ee.Filter.lt('CLOUD_COVER_LAND', max_cloud_cover))\
    .select(['B6'])

imgCol_L5_SR = ee.ImageCollection('LANDSAT/LT05/C01/T1_SR')\
    .filterBounds(select_roi)\
    .filter(ee.Filter.calendarRange(year_start, year_end, 'year'))\
    .filter(ee.Filter.calendarRange(month_start, month_end, 'month'))\
    .filter(ee.Filter.lt('CLOUD_COVER_LAND', max_cloud_cover))\
    .map(fun_mask_ls_sr)

imgCol_L5_SR = imgCol_L5_SR.map(fun_bands_l57)

# Landsat 7 ETM+
imgCol_L7_TOA = ee.ImageCollection('LANDSAT/LE07/C01/T1')\
    .filterBounds(select_roi)\
    .filter(ee.Filter.calendarRange(year_start, year_end, 'year'))\
    .filter(ee.Filter.calendarRange(month_start, month_end, 'month'))\
    .filter(ee.Filter.lt('CLOUD_COVER_LAND', max_cloud_cover))\
    .select(['B6_VCID_2'])

imgCol_L7_SR = ee.ImageCollection('LANDSAT/LE07/C01/T1_SR')\
    .filterBounds(select_roi)\
    .filter(ee.Filter.calendarRange(year_start, year_end, 'year'))\
    .filter(ee.Filter.calendarRange(month_start, month_end, 'month'))\
    .filter(ee.Filter.lt('CLOUD_COVER_LAND', max_cloud_cover))\
    .map(fun_mask_ls_sr)

imgCol_L7_SR = imgCol_L7_SR.map(fun_bands_l57)

# Landsat 8 OLI-TIRS
imgCol_L8_TOA = ee.ImageCollection('LANDSAT/LC08/C01/T1')\
    .filterBounds(select_roi)\
    .filter(ee.Filter.calendarRange(year_start, year_end, 'year'))\
    .filter(ee.Filter.calendarRange(month_start, month_end, 'month'))\
    .filter(ee.Filter.lt('CLOUD_COVER_LAND', max_cloud_cover))\
    .select(['B10'])

imgCol_L8_SR = ee.ImageCollection('LANDSAT/LC08/C01/T1_SR')\
    .filterBounds(select_roi)\
    .filter(ee.Filter.calendarRange(year_start, year_end, 'year'))\
    .filter(ee.Filter.calendarRange(month_start, month_end, 'month'))\
    .filter(ee.Filter.lt('CLOUD_COVER_LAND', max_cloud_cover))\
    .map(fun_mask_ls_sr)

imgCol_L8_SR = imgCol_L8_SR.map(fun_bands_l8)

# NCEP/NCAR Water Vapor Product
imgCol_WV = ee.ImageCollection('NCEP_RE/surface_wv')\
    .filterBounds(select_roi)\
    .filter(ee.Filter.calendarRange(year_start, year_end, 'year'))\
    .filter(ee.Filter.calendarRange(month_start, month_end, 'month'))
# --------------------------------------------------
# CALCULATE
# --------------------------------------------------
# TOA (Radiance) and SR
imgCol_L5_TOA = imgCol_L5_TOA.map(fun_radcal)
imgCol_L7_TOA = imgCol_L7_TOA.map(fun_radcal)
imgCol_L8_TOA = imgCol_L8_TOA.map(fun_radcal)

imgCol_L5_SR = ee.ImageCollection(join_l.apply(imgCol_L5_SR, imgCol_L5_TOA, maxDiffFilter))
imgCol_L7_SR = ee.ImageCollection(join_l.apply(imgCol_L7_SR, imgCol_L7_TOA, maxDiffFilter))
imgCol_L8_SR = ee.ImageCollection(join_l.apply(imgCol_L8_SR, imgCol_L8_TOA, maxDiffFilter))

imgCol_L5_SR = imgCol_L5_SR.map(fun_l_addband)
imgCol_L7_SR = imgCol_L7_SR.map(fun_l_addband)
imgCol_L8_SR = imgCol_L8_SR.map(fun_l_addband)

# Water Vapor
imgCol_L5_SR = ee.ImageCollection(join_wv.apply(imgCol_L5_SR, imgCol_WV, maxDiffFilter))
imgCol_L7_SR = ee.ImageCollection(join_wv.apply(imgCol_L7_SR, imgCol_WV, maxDiffFilter))
imgCol_L8_SR = ee.ImageCollection(join_wv.apply(imgCol_L8_SR, imgCol_WV, maxDiffFilter))

imgCol_L5_SR = imgCol_L5_SR.map(fun_wv_scale)
imgCol_L7_SR = imgCol_L7_SR.map(fun_wv_scale)
imgCol_L8_SR = imgCol_L8_SR.map(fun_wv_scale)

# Atmospheric Functions
imgCol_L5_SR = imgCol_L5_SR.map(fun_af1(cs_l5))
imgCol_L5_SR = imgCol_L5_SR.map(fun_af2(cs_l5))
imgCol_L5_SR = imgCol_L5_SR.map(fun_af3(cs_l5))

imgCol_L7_SR = imgCol_L7_SR.map(fun_af1(cs_l7))
imgCol_L7_SR = imgCol_L7_SR.map(fun_af2(cs_l7))
imgCol_L7_SR = imgCol_L7_SR.map(fun_af3(cs_l7))

imgCol_L8_SR = imgCol_L8_SR.map(fun_af1(cs_l8))
imgCol_L8_SR = imgCol_L8_SR.map(fun_af2(cs_l8))
imgCol_L8_SR = imgCol_L8_SR.map(fun_af3(cs_l8))

# Delta and Gamma Functions
imgCol_L5_SR = imgCol_L5_SR.map(fun_delta_l5)
imgCol_L7_SR = imgCol_L7_SR.map(fun_delta_l7)
imgCol_L8_SR = imgCol_L8_SR.map(fun_delta_l8)

imgCol_L5_SR = imgCol_L5_SR.map(fun_gamma_l5)
imgCol_L7_SR = imgCol_L7_SR.map(fun_gamma_l7)
imgCol_L8_SR = imgCol_L8_SR.map(fun_gamma_l8)

# Merge Collections
imgCol_merge = imgCol_L8_SR.merge(imgCol_L7_SR).merge(imgCol_L5_SR)
imgCol_merge = imgCol_merge.sort('system:time_start')

# Parameters and Indices
imgCol_merge = imgCol_merge.map(fun_ndvi)
imgCol_merge = imgCol_merge.map(fun_ndwi)
imgCol_merge = imgCol_merge.map(fun_tcg)
imgCol_merge = imgCol_merge.map(fun_tcb)
imgCol_merge = imgCol_merge.map(fun_tcw)

imgCol_merge = imgCol_merge.map(fun_fvc)
imgCol_merge = imgCol_merge.map(fun_epsilon_scale)
imgCol_merge = imgCol_merge.map(fun_epsilon)

# LST
imgCol_merge = imgCol_merge.map(fun_lst)
imgCol_merge = imgCol_merge.map(fun_mask_lst)
# --------------------------------------------------
# SPECTRAL TEMPORAL METRICS
# --------------------------------------------------
# Iterate over parameters and metrics
for parameter in select_parameters:
    # Mosaic the image collection by date
    pseudodate = ee.Date('1960-01-01')
    subCol = ee.ImageCollection(imgCol_merge.select(parameter))
    dates = fun_getdates(subCol)
    ini_date = ee.Date(dates.get(0))
    ini_merge = subCol.filterDate(ini_date, ini_date.advance(1, 'day'))
    ini_merge = ini_merge.select(parameter).mosaic().set('system:time_start', ini_date.millis())
    ini = ee.List([ini_merge])
    imgCol_mosaic = ee.ImageCollection(ee.List(dates.iterate(fun_mosaic, ini)))
    imgCol_mosaic = imgCol_mosaic.map(fun_timeband)

    for metric in select_metrics:
        if metric == 'ts':
            temp = imgCol_mosaic.select(['TIME', parameter]).reduce(ee.Reducer.sensSlope())
            temp = temp.select('slope')
            temp = temp.multiply(365)
            temp = temp.multiply(100000000).int32()
        elif metric == 'nobs':
            temp = imgCol_mosaic.select(parameter).count()
            temp = temp.int16()
        else:
            if metric == 'percentile':
                temp = imgCol_mosaic.select(parameter).reduce(ee.Reducer.percentile(percentiles))
            else:
                reducer = lookup_metrics[metric]
                temp = imgCol_mosaic.select(parameter).reduce(reducer)
            if parameter == 'LST':
                temp = temp.multiply(100).int16()
            else:
                temp = temp.multiply(10000).int16()

        # Export to Drive
        filename = parameter+'_'+roi_filename+'_GEE_'+str(year_start)+'-'+str(year_end)+'_'+\
            str(month_start)+'-'+str(month_end)+'_'+metric
        out = ee.batch.Export.image.toDrive(image=temp, description=filename,
                                            scale=pixel_resolution,
                                            maxPixels=1e13,
                                            region=delta,
                                            crs=epsg)
        process = ee.batch.Task.start(out)
collection = ee.ImageCollection('LANDSAT/LC08/C01/T1_TOA') \
    .filterBounds(lmav) \
    .filterDate('2020-01-01', '2021-12-30')
# select B5 and B4
collection = collection.select(['B5', 'B4'])
# calculate NDVI over the collection
def fun_ndvi(img):
    return img.addBands(img.normalizedDifference(['B5', 'B4']).rename('NDVI'))
ndwi = collection.map(fun_ndvi)
# list of image dates
dates = ndwi.aggregate_array('system:time_start').map(
    lambda d: ee.Date(d).format('YYYY-MM-dd')
)
# extract the water mask
def extract_water(img):
    ndwi = img.normalizedDifference(['B5', 'B4'])
    water_image = ndwi.gt(0)
    return water_image
ndwi_images = ndwi.map(extract_water)
water_images = ndwi_images.map(lambda img: img.selfMask())
dates = water_images.aggregate_array('system:index').getInfo()
# export each image to Google Drive, using the date in the description
count = int(water_images.size().getInfo())
print(count)
for i in range(0, count):
    image = ee.Image(water_images.toList(count).get(i))
    names = dates[i]
    # show progress with a progress bar in the notebook if desired
    # from tqdm import tqdm
    # export to google drive
    task = ee.batch.Export.image.toDrive(image, description=names, folder='google_ras', maxPixels=1e13, fileFormat='GeoTIFF', crs='EPSG:4326', scale=30, region=lmav)
    task.start()
41.02
s1 = ee.ImageCollection('COPERNICUS/S1_GRD') \
    .filterBounds(lmav) \
    .filterDate('2020-01-01', '2021-12-30')
# speckle filtering
def filterSpeckle(img):
    vv = img.select('VV')
    vv_smooth = vv.focal_median(100, 'circle', 'meters').rename('VV_filtered')
    return img.addBands(vv_smooth)
filtered = s1.map(filterSpeckle)
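# A rough check of the area implied by the -14 dB threshold before running the
# monthly exports (a sketch; the threshold is the one used above, the pixelArea
# sum is the standard area recipe, and the coarse scale just keeps getInfo fast):
flooded = filtered.select('VV_filtered').median().lt(-14).selfMask()
area = flooded.multiply(ee.Image.pixelArea()).reduceRegion(
    reducer=ee.Reducer.sum(), geometry=lmav, scale=100, maxPixels=1e13)
print(ee.Number(area.values().get(0)).divide(1e6).getInfo(), 'km2 flooded')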
# projects/ee-hafezahmad10/assets/
stream = ee.FeatureCollection('projects/ee-hafezahmad10/assets/huc10_h_orconectoides').geometry()
dataset = ee.FeatureCollection('USGS/WBD/2017/HUC10')
def clip(img):
    return img.clip(stream)
# clip to stream
stream_clip = dataset.filterBounds(stream)
dataset = ee.ImageCollection('USGS/3DEP/1m')\
    .filter(ee.Filter.date('1998-12-01', '2019-12-31'))\
    .filterBounds(stream)
# clip to stream
clipeddem = dataset.map(clip)
# EPSG:26916 for NAD83 / UTM zone 16N
# Export.image expects a single image, so mosaic the clipped collection first
task = ee.batch.Export.image.toDrive(image=clipeddem.mosaic(), description='3DEP', folder='google_ras', maxPixels=1e13, fileFormat='GeoTIFF', crs='EPSG:26916', scale=1, region=stream)
task.start()
map = geemap.Map(center=[33.669, -91.021], zoom=6)
map.addLayer(stream, {}, 'stream')
map.addLayer(stream_clip, {}, 'stream_clip')
map
# ndvi and lst calculation
col = ee.ImageCollection('LANDSAT/LT05/C02/T1_L2') \
    .filterDate('1985-01-01', '1985-6-30') \
    .filterBounds(roi)
image = col.median()
# SR_B4 is the near-infrared band for Landsat 5
# SR_B3 is the red band for Landsat 5
# SR_B2 is the green band for Landsat 5
# SR_B1 is the blue band for Landsat 5
# ST_B6 is the thermal band for Landsat 5; multiply by 0.00341802 and add 149 to get brightness temperature in kelvin
# calculate the NDVI for the single median image
ndvi = image.normalizedDifference(['SR_B4', 'SR_B3']).rename('NDVI')
# select thermal band 6 (brightness temperature); only the scaling above, no further calculation
thermal = image.select('ST_B6').multiply(0.00341802).add(149)
# find the min and max of NDVI (reduceRegion takes keyword arguments in the Python API;
# ndvi_min/ndvi_max also avoid shadowing the built-in min/max)
ndvi_min = ee.Number(ndvi.reduceRegion(reducer=ee.Reducer.min(), geometry=roi, scale=30, maxPixels=1e9).values().get(0))
print(ndvi_min, 'min')
ndvi_max = ee.Number(ndvi.reduceRegion(reducer=ee.Reducer.max(), geometry=roi, scale=30, maxPixels=1e9).values().get(0))
print(ndvi_max, 'max')
# fractional vegetation
fv = (ndvi.subtract(ndvi_min).divide(ndvi_max.subtract(ndvi_min))).pow(ee.Number(2)).rename('FV')
# emissivity
a = ee.Number(0.004)
b = ee.Number(0.986)
EM = fv.multiply(a).add(b).rename('EMM')
# LST in degrees Celsius: subtract 273.15
# NB: to keep kelvin, don't subtract 273.15
LST = thermal.expression('(Tb/(1 + (0.00115* (Tb / 1.438))*log(Ep)))-273.15',
                         {'Tb': thermal.select('ST_B6'), 'Ep': EM.select('EMM')}).rename('LST')
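For reference, the expression above appears to be the standard single-channel emissivity correction: with brightness temperature $T_b$ in kelvin, emitted-radiance wavelength $\lambda \approx 11.5\ \mu\text{m}$ for band 6, and $\rho = hc/\sigma \approx 1.438 \times 10^{-2}\ \text{m K}$,

$$\mathrm{LST} = \frac{T_b}{1 + (\lambda T_b/\rho)\,\ln\varepsilon} - 273.15$$

which matches the constants in the code: 0.00115 and 1.438 are $\lambda$ and $\rho$ expressed in the same scaled units, so $0.00115/1.438 = 1.15\times10^{-5}/1.438\times10^{-2} = \lambda/\rho$.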
# display the LST
Map.addLayer(LST.clip(roi), {'min': -10, 'max': 40.328077233404645, 'palette': [
    '040274', '040281', '0502a3', '0502b8', '0502ce', '0502e6',
    '0602ff', '235cb1', '307ef3', '269db1', '30c8e2', '32d3ef',
    '3be285', '3ff38f', '86e26f', '3ae237', 'b5e22e', 'd6e21f',
    'fff705', 'ffd611', 'ffb613', 'ff8b13', 'ff6e08', 'ff500d',
    'ff0000', 'de0101', 'c21301', 'a71001', '911003']}, 'LST1985')
# ndvi export
task1 = ee.batch.Export.image.toDrive(ndvi.clip(roi), description='ndvi', folder='google_ras', maxPixels=1e13,
                                      fileFormat='GeoTIFF', crs='EPSG:26915', scale=30, region=roi)
task1.start()
# lst export
task1 = ee.batch.Export.image.toDrive(LST.clip(roi), description='lst1985', folder='google_ras', maxPixels=1e13,
                                      fileFormat='GeoTIFF', crs='EPSG:26915', scale=30, region=roi)
task1.start()
# now generate 12 monthly images, apply speckle filtering, threshold, and export, using a for loop
for i in range(1, 13):  # calendarRange months run from 1 to 12
    year = '2020'
    s1_month = s1.filter(ee.Filter.calendarRange(i, i, 'month'))
    filtered = s1_month.map(filterSpeckle)
    flooded = filtered.select('VV_filtered').median().lt(-14).rename('water').selfMask()
    # export to google drive
    task = ee.batch.Export.image.toDrive(flooded.clip(lmav), description=str(i)+'_'+year, folder='google_ras',
                                         maxPixels=1e13, fileFormat='GeoTIFF', crs='EPSG:32615', scale=10, region=lmav)
    task.start()
datasets = sankee.datasets.NLCD
years = [2001, 2006, 2011, 2016, 2019]
sankee.datasets.NLCD.sankify(
    years=years,
    region=roi,
    max_classes=20
)
(Output: interactive Plotly Sankey diagram of NLCD land-cover transitions.)
flood = [10.1, 10, '1/1/2001', 4.5, '1/10/2009', 4.5, '1/10/2009', 23.0, '1/10/2003']
# make dataframe
df = pd.DataFrame({"flood": flood})
# add new column
df['date'] = 0
# if a flood value contains the "/" symbol, it is a date: move it one cell up, into the date column
for index, row in df.iterrows():
    if "/" in str(row['flood']):
        # move the date string to the previous row's date column
        df.loc[index-1, 'date'] = df.loc[index, 'flood']
        # replace the flood value with 0
        df.loc[index, 'flood'] = 0
    else:
        # keep the flood value as it is
        df.loc[index, 'flood'] = row['flood']
df
| | flood | date |
|---|---|---|
| 0 | 10.1 | 0 |
| 1 | 10 | 1/1/2001 |
| 2 | 0 | 0 |
| 3 | 4.5 | 1/10/2009 |
| 4 | 0 | 0 |
| 5 | 4.5 | 1/10/2009 |
| 6 | 0 | 0 |
| 7 | 23.0 | 1/10/2003 |
| 8 | 0 | 0 |
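The row-by-row loop works; for larger tables the same reshaping can be done with vectorized pandas operations. A sketch under the same assumption (the date strings are exactly the values containing "/"):

```python
# Sketch: vectorized equivalent of the loop above. Values containing '/'
# are dates; shift each one up a row into 'date' and zero it out in 'flood'.
df = pd.DataFrame({"flood": flood})
is_date = df['flood'].astype(str).str.contains('/')
df['date'] = df['flood'].where(is_date).shift(-1).fillna(0)
df['flood'] = df['flood'].mask(is_date, 0)
```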
os.chdir('W:/Home/hahmad/public/temp _project/landsat/train sample')
training_vectors = gpd.read_file('land2020.shp')
# change the column name
training_vectors.rename(columns={'Classname': 'name'}, inplace=True)
training_vectors.head(2)
| | name | Classvalue | RED | GREEN | BLUE | Count | geometry |
|---|---|---|---|---|---|---|---|
| 0 | Builtup | 14 | 249 | 164 | 27 | 633 | MULTIPOLYGON (((91.77465 22.30005, 91.77416 22... |
| 1 | Forest | 7 | 230 | 69 | 99 | 788 | MULTIPOLYGON (((91.78340 22.30008, 91.78336 22... |
# sentineltrain.shp
training_vectors = gpd.read_file('sentineltrain.shp')
# change the column name
training_vectors.rename(columns={'Classname': 'name'}, inplace=True)
training_vectors.head(2)
| | name | Classvalue | RED | GREEN | BLUE | Count | geometry |
|---|---|---|---|---|---|---|---|
| 0 | water | 1 | 166 | 134 | 144 | 1864 | MULTIPOLYGON (((376488.292 2466751.725, 376367... |
| 1 | vegetation | 6 | 52 | 166 | 9 | 1183 | MULTIPOLYGON (((378511.947 2471297.397, 378436... |
# find all unique values of training data names to use as classes
classes = np.unique(training_vectors.name)
# classes = np.array(sorted(training_vectors.name.unique()))
classes
array(['Builtup', 'barrenland', 'vegetation', 'water'], dtype=object)
# create class dictionary
class_dict = dict(zip(classes, range(0, len(classes))))
class_dict
{'Builtup': 0, 'barrenland': 1, 'vegetation': 2, 'water': 3}
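A reverse lookup of this dictionary is convenient later, when integer predictions need to be mapped back to class names; a one-line sketch:

```python
# invert the class dictionary: integer id -> class name
inv_class_dict = {v: k for k, v in class_dict.items()}
```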
training_vectors.total_bounds.tolist()
[91.76241212436037, 22.299645961558667, 91.87913943267228, 22.448354029125426]
# training_vectors.total_bounds.tolist()
aoi = ee.Geometry.Rectangle(training_vectors.total_bounds.tolist())
band_sel = ('B2', 'B3', 'B4', 'B8', 'B11', 'B12')

sentinel_scenes = ee.ImageCollection("COPERNICUS/S2")\
    .filterBounds(aoi)\
    .filterDate('2019-01-02', '2019-06-03')\
    .select(band_sel)\
    .filter(ee.Filter.lt('CLOUDY_PIXEL_PERCENTAGE', 5))

scenes = sentinel_scenes.getInfo()
[print(scene['id']) for scene in scenes["features"]]
sentinel_mosaic = sentinel_scenes.mean().rename(band_sel)
# clip the mosaic to the AOI (this is what gets displayed and exported)
sentinel_mosaic = sentinel_mosaic.clip(aoi)
COPERNICUS/S2/20190102T043151_20190102T043514_T46QCK
COPERNICUS/S2/20190104T042149_20190104T042450_T46QCK
COPERNICUS/S2/20190107T043149_20190107T043708_T46QCK
COPERNICUS/S2/20190109T042141_20190109T042619_T46QCK
COPERNICUS/S2/20190112T043141_20190112T043642_T46QCK
COPERNICUS/S2/20190117T043119_20190117T043639_T46QCK
COPERNICUS/S2/20190119T042111_20190119T042521_T46QCK
COPERNICUS/S2/20190122T043101_20190122T043322_T46QCK
COPERNICUS/S2/20190124T042049_20190124T042052_T46QCK
COPERNICUS/S2/20190127T043039_20190127T043627_T46QCK
COPERNICUS/S2/20190201T043011_20190201T044135_T46QCK
COPERNICUS/S2/20190203T041959_20190203T042111_T46QCK
COPERNICUS/S2/20190206T042949_20190206T043539_T46QCK
COPERNICUS/S2/20190208T041931_20190208T042943_T46QCK
COPERNICUS/S2/20190213T041859_20190213T042733_T46QCK
COPERNICUS/S2/20190223T041759_20190223T042551_T46QCK
COPERNICUS/S2/20190303T042701_20190303T042658_T46QCK
COPERNICUS/S2/20190310T041601_20190310T041832_T46QCK
COPERNICUS/S2/20190313T042701_20190313T043354_T46QCK
COPERNICUS/S2/20190315T041549_20190315T042425_T46QCK
COPERNICUS/S2/20190320T041551_20190320T042654_T46QCK
COPERNICUS/S2/20190323T042701_20190323T043918_T46QCK
COPERNICUS/S2/20190325T041549_20190325T042403_T46QCK
COPERNICUS/S2/20190328T042709_20190328T043805_T46QCK
COPERNICUS/S2/20190330T041551_20190330T043120_T46QCK
COPERNICUS/S2/20190409T041551_20190409T041550_T46QCK
COPERNICUS/S2/20190412T042711_20190412T042705_T46QCK
COPERNICUS/S2/20190414T041559_20190414T043038_T46QCK
COPERNICUS/S2/20190419T041551_20190419T042804_T46QCK
COPERNICUS/S2/20190424T041559_20190424T042606_T46QCK
COPERNICUS/S2/20190507T042709_20190507T043011_T46QCK
COPERNICUS/S2/20190512T042711_20190512T042707_T46QCK
COPERNICUS/S2/20190514T041559_20190514T043124_T46QCK
COPERNICUS/S2/20190517T042709_20190517T043610_T46QCK
COPERNICUS/S2/20190529T041551_20190529T043058_T46QCK
# current directory
root_dir = os.getcwd()
print(root_dir)
# We will save it to Google Drive for later reuse
raster_name = op.join(root_dir, 'sentinel_mosaic-Trans_Nzoia')
print(raster_name)
task = ee.batch.Export.image.toDrive(**{
    'image': sentinel_mosaic,
    'description': 'Trans_nzoia_2019_05_02',
    'folder': op.basename(root_dir),
    'fileNamePrefix': raster_name,
    'scale': 10,
    'region': aoi,
    'crs': 'EPSG:32646',  # WGS 1984 UTM Zone 46N, for Chittagong
    'fileFormat': 'GeoTIFF',
    'formatOptions': {
        'cloudOptimized': True
    },
})
task.start()
W:\Home\hahmad\public\temp _project\landsat\train sample
W:\Home\hahmad\public\temp _project\landsat\train sample\sentinel_mosaic-Trans_Nzoia
%%time
# raster information
raster_file = op.join(root_dir, 'W__Home_hahmad_public_temp _project_landsat_train sample_sentinel_mosaic-Trans_Nzoia.tif')
print(raster_file)
# raster_file = '/content/drive/Shared drives/servir-sat-ml/data/gee_sentinel_mosaic-Trans_Nzoia.tif'
# a custom function for getting each value from the raster
def all_values(x):
    return x
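`all_values` is never called directly below; it has the shape of a custom statistic for rasterstats' `add_stats` hook, which passes each feature's masked pixel array to the function. A minimal sketch of how it could be used (this call is an illustration, not a step from this document):

```python
# Sketch: return every masked pixel value per training polygon via the
# rasterstats add_stats hook (each custom function receives a masked array).
from rasterstats import zonal_stats

stats = zonal_stats(training_vectors.geometry, raster_file,
                    add_stats={'all_values': all_values})
```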
# this larger cell reads data from the raster file for each training vector
X_raw = []
y_raw = []
with rasterio.open(raster_file, 'r') as src:
    for (label, geom) in zip(training_vectors.name, training_vectors.geometry):
        # read the raster data matching the geometry bounds
        window = bounds_window(geom.bounds, src.transform)
        # store our window information
        window_affine = src.window_transform(window)
        fsrc = src.read(window=window)
        # rasterize the (non-buffered) geometry into the larger shape and affine
        mask = rasterize(
            [(geom, 1)],
            out_shape=fsrc.shape[1:],
            transform=window_affine,
            fill=0,
            dtype='uint8',
            all_touched=True
        ).astype(bool)
        # for each label pixel (places where the mask is true)...
        label_pixels = np.argwhere(mask)
        for (row, col) in label_pixels:
            # add a pixel of data to X
            data = fsrc[:, row, col]
            one_x = np.nan_to_num(data, nan=1e-3)
            X_raw.append(one_x)
            # add the label to y
            y_raw.append(class_dict[label])
W:\Home\hahmad\public\temp _project\landsat\train sample\W__Home_hahmad_public_temp _project_landsat_train sample_sentinel_mosaic-Trans_Nzoia.tif
CPU times: total: 844 ms
Wall time: 2.16 s
# convert the training data lists into the appropriate shape and format for scikit-learn
X = np.array(X_raw)
y = np.array(y_raw)
(X.shape, y.shape)
((5549, 6), (5549,))
# helper function for calculating ND*I indices (bands in the final dimension)
def band_index(arr, a, b):
    return np.expand_dims((arr[..., a] - arr[..., b]) / (arr[..., a] + arr[..., b]), axis=1)

# with band_sel = (B2, B3, B4, B8, B11, B12): index 3 is B8 (NIR), 2 is B4 (red), 1 is B3 (green)
ndvi = band_index(X, 3, 2)   # (NIR - red) / (NIR + red)
ndwi = band_index(X, 1, 3)   # (green - NIR) / (green + NIR)

X = np.concatenate([X, ndvi, ndwi], axis=1)
X.shape
(5549, 8)
# split the data into test and train sets
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)
# calculate class weights to allow for training on imbalanced training samples
labels, counts = np.unique(y_train, return_counts=True)
class_weight_dict = dict(zip(labels, 1 / counts))
class_weight_dict
{0: 0.0007309941520467836,
1: 0.004651162790697674,
2: 0.0008665511265164644,
3: 0.0005875440658049354}
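Since each weight is the reciprocal of its class count, weight times count is 1 for every class; a quick sanity check:

```python
# sanity check: inverse-frequency weights satisfy weight * count == 1
for label, count in zip(labels, counts):
    assert class_weight_dict[label] * count == 1.0
```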
%%time
# initialize a lightgbm classifier
lgbm = lgb.LGBMClassifier(
    objective='multiclass',
    class_weight=class_weight_dict,
    num_class=len(class_dict),
    metric='multi_logloss')
CPU times: total: 0 ns
Wall time: 0 ns
%%time
# fit the model to the data (training)
lgbm.fit(X_train, y_train)
CPU times: total: 969 ms
Wall time: 557 ms
LGBMClassifier(class_weight={0: 0.0002421893921046258,
1: 0.00017661603673613564,
2: 0.0001328550551348479,
3: 0.00026219192448872575},
metric='multi_logloss', num_class=4, objective='multiclass')
import xgboost as xgb
from sklearn.model_selection import GridSearchCV
xgb_clf = xgb.XGBClassifier()  # named xgb_clf so the estimator doesn't shadow the xgboost module
%%time
param_dict = {'learning_rate': [0.001, 0.1, 0.5],
              'n_estimators': [150, 200, 300, 1000]}
grid_search = GridSearchCV(xgb_clf, param_grid=param_dict, cv=2)
grid_search.fit(X_train, y_train)
GridSearchCV(cv=2,
estimator=XGBClassifier(base_score=None, booster=None,
callbacks=None, colsample_bylevel=None,
colsample_bynode=None,
colsample_bytree=None,
early_stopping_rounds=None,
enable_categorical=False, eval_metric=None,
gamma=None, gpu_id=None, grow_policy=None,
importance_type=None,
interaction_constraints=None,
learning_rate=None, max_bin=None,
max_cat_to_onehot=None,
max_delta_step=None, max_depth=None,
max_leaves=None, min_child_weight=None,
missing=nan, monotone_constraints=None,
n_estimators=100, n_jobs=None,
num_parallel_tree=None, predictor=None,
random_state=None, reg_alpha=None,
reg_lambda=None, ...),
param_grid={'learning_rate': [0.001, 0.1, 0.5],
'n_estimators': [150, 200, 300, 1000]})
grid_search.best_params_
{'learning_rate': 0.5, 'n_estimators': 300}
%%time
from xgboost import XGBClassifier
# note: XGBClassifier has no class_weight parameter, so XGBoost warns that it may be unused (see below);
# per-sample weighting would go through fit(..., sample_weight=...) instead
xgb_1 = XGBClassifier(max_depth=30, learning_rate=0.5, n_estimators=300, n_jobs=-1, class_weight=class_weight_dict)
xgb_1.fit(X_train, y_train)
[11:48:16] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.6.0/src/learner.cc:627:
Parameters: { "class_weight" } might not be used.
This could be a false alarm, with some parameters getting used by language bindings but
then being mistakenly passed down to XGBoost core, or some parameter actually being used
but getting flagged wrongly here. Please open an issue if you find any such cases.
CPU times: total: 3.84 s
Wall time: 3.81 s
XGBClassifier(base_score=0.5, booster='gbtree', callbacks=None,
class_weight={0: 0.0007309941520467836, 1: 0.004651162790697674,
2: 0.0008665511265164644,
3: 0.0005875440658049354},
colsample_bylevel=1, colsample_bynode=1, colsample_bytree=1,
early_stopping_rounds=None, enable_categorical=False,
eval_metric=None, gamma=0, gpu_id=-1, grow_policy='depthwise',
importance_type=None, interaction_constraints='',
learning_rate=0.5, max_bin=256, max_cat_to_onehot=4,
max_delta_step=0, max_depth=30, max_leaves=0, min_child_weight=1,
missing=nan, monotone_constraints='()', n_estimators=300,
n_jobs=-1, num_parallel_tree=1, objective='multi:softprob',
predictor='auto', random_state=0, ...)
preds_xgb = xgb_1.predict(X_test)
cm_xgb = confusion_matrix(y_test, preds_xgb, labels=labels)
# calculate the accuracy of the model
from sklearn.metrics import accuracy_score
accuracy_score(y_test, preds_xgb)
# calculate overall accuracy and kappa score
from sklearn.metrics import accuracy_score, cohen_kappa_score
accuracy_score(y_test, preds_xgb)
cohen_kappa_score(y_test, preds_xgb)
1.0
cm = cm_xgb.astype('float') / cm_xgb.sum(axis=1)[:, np.newaxis]
fig, ax = plt.subplots(figsize=(10, 10))
im = ax.imshow(cm, interpolation='nearest', cmap=plt.cm.Blues)
ax.figure.colorbar(im, ax=ax)
# We want to show all ticks...
ax.set(xticks=np.arange(cm.shape[1]),
       yticks=np.arange(cm.shape[0]),
       # ... and label them with the respective list entries
       xticklabels=classes, yticklabels=classes,
       title='Normalized Confusion Matrix',
       ylabel='True label',
       xlabel='Predicted label')

# Rotate the tick labels and set their alignment.
plt.setp(ax.get_xticklabels(), rotation=45, ha="right",
         rotation_mode="anchor")

# Loop over data dimensions and create text annotations.
fmt = '.2f'
thresh = cm.max() / 2.
for i in range(cm.shape[0]):
    for j in range(cm.shape[1]):
        ax.text(j, i, format(cm[i, j], fmt),
                ha="center", va="center",
                color="white" if cm[i, j] > thresh else "black")
fig.tight_layout()
# predict on X_test to evaluate the model
preds = lgbm.predict(X_test)
cm = confusion_matrix(y_test, preds, labels=labels)
model_name = 'light_gbm.sav'
pickle.dump(lgbm, open(op.join(root_dir, model_name), 'wb'))
# match the pretrained model weights with the saved model above
model_name = 'light_gbm.sav'

# helper function for calculating ND*I indices (bands in the final dimension)
def band_index(arr, a, b):
    return np.expand_dims((arr[..., a] - arr[..., b]) / (arr[..., a] + arr[..., b]), axis=1)

lgbm = pickle.load(open(op.join(root_dir, model_name), 'rb'))
%%time
# open connections to our input and output images
# new_image = op.join(root_dir, 'Trans_nzoia_2019_10-04.tif')
raster_file = 'W__Home_hahmad_public_temp _project_landsat_train sample_sentinel_mosaic-Trans_Nzoia.tif'
new_image = raster_file
output_image = op.join(root_dir, "lgbm_classification.tif")

with rasterio.open(new_image, 'r') as src:
    profile = src.profile
    profile.update(
        dtype=rasterio.uint8,
        count=1,
    )

    with rasterio.open(output_image, 'w', **profile) as dst:
        # perform prediction on each small image patch to minimize required memory
        patch_size = 500

        for i in range((src.shape[0] // patch_size) + 1):
            for j in range((src.shape[1] // patch_size) + 1):
                # define the pixels to read (and write)
                window = rasterio.windows.Window(
                    j * patch_size,
                    i * patch_size,
                    # don't read past the image bounds
                    min(patch_size, src.shape[1] - j * patch_size),
                    min(patch_size, src.shape[0] - i * patch_size)
                )
                data = src.read(window=window)
                # get the image into the proper format, adding the same indices as during training
                img_swp = np.moveaxis(data, 0, 2)
                img_flat = img_swp.reshape(-1, img_swp.shape[-1])

                img_ndvi = band_index(img_flat, 3, 2)
                img_ndwi = band_index(img_flat, 1, 3)

                img_w_ind = np.concatenate([img_flat, img_ndvi, img_ndwi], axis=1)

                # remove no data values, store the indices for later use
                # a later step assumes all bands have identical no-data value arrangements
                m = np.ma.masked_invalid(img_w_ind)
                to_predict = img_w_ind[~m.mask].reshape(-1, img_w_ind.shape[-1])

                # skip empty patches
                if not len(to_predict):
                    continue
                img_preds = lgbm.predict(to_predict)

                # add the predictions back to the valid pixels (using only the first
                # band of the mask to decide validity) and resize to the original
                # image dimensions
                output = np.zeros(img_flat.shape[0])
                output[~m.mask[:, 0]] = img_preds.flatten()
                output = output.reshape(*img_swp.shape[:-1])

                # create our final mask
                mask = (~m.mask[:, 0]).reshape(*img_swp.shape[:-1])

                # write to the final file
                dst.write(output.astype(rasterio.uint8), 1, window=window)
                dst.write_mask(mask, window=window)
CPU times: total: 52.4 s
Wall time: 25.3 s
%%time
# Load the classification
if os.path.exists(op.join(root_dir, "lgbm_classification.tif")):
    output_image = op.join(root_dir, "lgbm_classification.tif")
else:
    output_image = 'lgbm_classification.tif'

def linear_rescale(image, in_range=(0, 1), out_range=(1, 255)):
    imin, imax = in_range
    omin, omax = out_range
    image = np.clip(image, imin, imax) - imin
    image = image / float(imax - imin)  # use the builtin float; np.float is deprecated
    return image * (omax - omin) + omin
with rasterio.open(output_image, 'r') as class_raster:
    # show(class_raster)
    lgbm_classes = class_raster.read()

# Load the original image
with rasterio.open(raster_file, 'r') as s2_raster:
    # show(s2_raster)
    s2 = s2_raster.read([1, 2, 3])

# Compare side by side
fig, (ax1, ax2) = plt.subplots(ncols=2, nrows=1, figsize=(10, 4), sharey=True)
show(lgbm_classes, transform=class_raster.transform, ax=ax1, title="lgbm Classes")
show(s2[[2, 1, 0], :, :], transform=s2_raster.transform, adjust='linear', ax=ax2, title="Sentinel 2 RGB")
Clipping input data to the valid range for imshow with RGB data ([0..1] for floats or [0..255] for integers).
CPU times: total: 1.22 s
Wall time: 2.09 s
<AxesSubplot:title={'center':'Sentinel 2 RGB'}>
# Based on the histogram we'll set a threshold and rescale the data for visualization
# The values used for in_range depend on the datatype and range of your data
# If you got the data from GEE, try in_range=(0, 2500)
for band in range(s2.shape[0]):
    s2[band] = linear_rescale(
        s2[band],
        # in_range=(0, 0.25),
        in_range=(0, 2500),
        out_range=[0, 255]
    )
s2 = s2.astype(np.uint8)
# Now retry the plot
fig, (ax1, ax2) = plt.subplots(ncols=2, nrows=1, figsize=(10, 4), sharey=True)
show(lgbm_classes, transform=class_raster.transform, ax=ax1, title="lgbm Classes")
show(s2[[2, 1, 0], :, :], transform=s2_raster.transform, adjust='linear', ax=ax2, title="Sentinel 2 RGB")
<AxesSubplot:title={'center':'Sentinel 2 RGB'}>
from matplotlib import colors
from matplotlib import cm
with rasterio.open(output_image, 'r') as rf_class_raster:
    # show(class_raster)
    rf_classes = rf_class_raster.read()

cls_colors = ['#ba0d11',
              '#ffa718',
              '#6fba4a',
              '#2b83ba']
clr_land = colors.ListedColormap([colors.hex2color(x) for x in cls_colors])

fig, (ax1) = plt.subplots(ncols=1, nrows=1, figsize=(12, 6), sharey=True)

im1 = ax1.imshow(lgbm_classes.squeeze(), cmap=cm.Accent_r)

show(lgbm_classes,
     transform=class_raster.transform,
     ax=ax1,
     title="lgbm Classes",
     cmap=clr_land
     )
# Now make a colorbar
norm = colors.BoundaryNorm(np.arange(0, 5), clr_land.N)
cb = plt.colorbar(cm.ScalarMappable(norm=norm, cmap=clr_land), ax=ax1, fraction=0.03)
cb.set_ticks([x + .8 for x in range(0, 4)])  # move the tick marks toward the middle of each color
cb.set_ticklabels(list(class_dict.keys()))  # label the colors
np.arange(0, 4)
array([0, 1, 2, 3])
class_dict
{'BarrendLand': 0, 'Builtup': 1, 'Forest': 2, 'Waterbodies': 3}